# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2015 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
import gzip
from io import BytesIO
import json
import logging
import os
import posixpath
import re
try:
    import threading
except ImportError:  # pragma: no cover
    import dummy_threading as threading
import zlib

from . import DistlibException
from .compat import (urljoin, urlparse, urlunparse, url2pathname, pathname2url,
                     queue, quote, unescape, string_types, build_opener,
                     HTTPRedirectHandler as BaseRedirectHandler, text_type,
                     Request, HTTPError, URLError)
from .database import Distribution, DistributionPath, make_dist
from .metadata import Metadata, MetadataInvalidError
from .util import (cached_property, parse_credentials, ensure_slash,
                   split_filename, get_project_data, parse_requirement,
                   parse_name_and_version, ServerProxy, normalize_name)
from .version import get_scheme, UnsupportedVersionError
from .wheel import Wheel, is_compatible

logger = logging.getLogger(__name__)

HASHER_HASH = re.compile(r'^(\w+)=([a-f0-9]+)')
CHARSET = re.compile(r';\s*charset\s*=\s*(.*)\s*$', re.I)
HTML_CONTENT_TYPE = re.compile('text/html|application/x(ht)?ml')
DEFAULT_INDEX = 'https://pypi.org/pypi'


def get_all_distribution_names(url=None):
    """
    Return all distribution names known by an index.
    :param url: The URL of the index.
    :return: A list of all known distribution names.
    """
    if url is None:
        url = DEFAULT_INDEX
    client = ServerProxy(url, timeout=3.0)
    try:
        return client.list_packages()
    finally:
        client('close')()
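
# Example usage (illustrative, not executed; assumes network access and an
# index which still exposes the XML-RPC ``list_packages`` call):
#
#     names = get_all_distribution_names()
#     print(len(names))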


class RedirectHandler(BaseRedirectHandler):
    """
    A class to work around a bug in some Python 3.2.x releases.
    """
    # There's a bug in the base version for some 3.2.x
    # (e.g. 3.2.2 on Ubuntu Oneiric). If a Location header
    # returns e.g. /abc, it bails because it says the scheme ''
    # is bogus, when actually it should use the request's
    # URL for the scheme. See Python issue #13696.
    def http_error_302(self, req, fp, code, msg, headers):
        # Some servers (incorrectly) return multiple Location headers
        # (so probably same goes for URI). Use first header.
        newurl = None
        for key in ('location', 'uri'):
            if key in headers:
                newurl = headers[key]
                break
        if newurl is None:  # pragma: no cover
            return
        urlparts = urlparse(newurl)
        if urlparts.scheme == '':
            newurl = urljoin(req.get_full_url(), newurl)
            if hasattr(headers, 'replace_header'):
                headers.replace_header(key, newurl)
            else:
                headers[key] = newurl
        return BaseRedirectHandler.http_error_302(self, req, fp, code, msg,
                                                  headers)

    http_error_301 = http_error_303 = http_error_307 = http_error_302


class Locator(object):
    """
    A base class for locators - things that locate distributions.
    """
    source_extensions = ('.tar.gz', '.tar.bz2', '.tar', '.zip', '.tgz', '.tbz')
    binary_extensions = ('.egg', '.exe', '.whl')
    excluded_extensions = ('.pdf',)

    # A list of tags indicating which wheels you want to match. The default
    # value of None matches against the tags compatible with the running
    # Python. If you want to match other values, set wheel_tags on a locator
    # instance to a list of tuples (pyver, abi, arch) which you want to match.
    wheel_tags = None

    downloadable_extensions = source_extensions + ('.whl',)

    def __init__(self, scheme='default'):
        """
        Initialise an instance.
        :param scheme: Because locators look for most recent versions, they
                       need to know the version scheme to use. This specifies
                       the current PEP-recommended scheme - use ``'legacy'``
                       if you need to support existing distributions on PyPI.
        """
        self._cache = {}
        self.scheme = scheme
        # Because of bugs in some of the handlers on some of the platforms,
        # we use our own opener rather than just using urlopen.
        self.opener = build_opener(RedirectHandler())
        # If get_project() is called from locate(), the matcher instance
        # is set from the requirement passed to locate(). See issue #18 for
        # why this can be useful to know.
        self.matcher = None
        self.errors = queue.Queue()

    def get_errors(self):
        """
        Return any errors which have occurred.
        """
        result = []
        while not self.errors.empty():  # pragma: no cover
            try:
                e = self.errors.get(False)
                result.append(e)
            except self.errors.Empty:
                continue
            self.errors.task_done()
        return result

    def clear_errors(self):
        """
        Clear any errors which may have been logged.
        """
        # Just get the errors and throw them away
        self.get_errors()

    def clear_cache(self):
        self._cache.clear()

    def _get_scheme(self):
        return self._scheme

    def _set_scheme(self, value):
        self._scheme = value

    scheme = property(_get_scheme, _set_scheme)

    def _get_project(self, name):
        """
        For a given project, get a dictionary mapping available versions to Distribution
        instances.
        This should be implemented in subclasses.
        If called from a locate() request, self.matcher will be set to a
        matcher for the requirement to satisfy, otherwise it will be None.
        """
        raise NotImplementedError('Please implement in the subclass')

    def get_distribution_names(self):
        """
        Return all the distribution names known to this locator.
        """
        raise NotImplementedError('Please implement in the subclass')

    def get_project(self, name):
        """
        For a given project, get a dictionary mapping available versions to Distribution
        instances.
        This calls _get_project to do all the work, and just implements a caching layer on top.
        """
        if self._cache is None:  # pragma: no cover
            result = self._get_project(name)
        elif name in self._cache:
            result = self._cache[name]
        else:
            self.clear_errors()
            result = self._get_project(name)
            self._cache[name] = result
        return result

    def score_url(self, url):
        """
        Give an url a score which can be used to choose preferred URLs
        for a given project release.
        """
        t = urlparse(url)
        basename = posixpath.basename(t.path)
        compatible = True
        is_wheel = basename.endswith('.whl')
        is_downloadable = basename.endswith(self.downloadable_extensions)
        if is_wheel:
            compatible = is_compatible(Wheel(basename), self.wheel_tags)
        return (t.scheme == 'https', 'pypi.org' in t.netloc,
                is_downloadable, is_wheel, compatible, basename)

    def prefer_url(self, url1, url2):
        """
        Choose one of two URLs where both are candidates for distribution
        archives for the same version of a distribution (for example,
        .tar.gz vs. zip).
        The current implementation favours https:// URLs over http://, archives
        from PyPI over those from other locations, wheel compatibility (if a
        wheel) and then the archive name.
        """
        result = url2
        if url1:
            s1 = self.score_url(url1)
            s2 = self.score_url(url2)
            if s1 > s2:
                result = url1
            if result != url2:
                logger.debug('Not replacing %r with %r', url1, url2)
            else:
                logger.debug('Replacing %r with %r', url1, url2)
        return result
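
    # Illustration (not executed): scores are plain tuples, so prefer_url's
    # comparison is lexicographic - an https wheel on PyPI outranks an http
    # sdist hosted elsewhere (the URLs below are hypothetical):
    #
    #     self.score_url('https://pypi.org/.../foo-1.0-py3-none-any.whl')
    #     # -> (True, True, True, True, True, 'foo-1.0-py3-none-any.whl')
    #     self.score_url('http://example.com/dl/foo-1.0.tar.gz')
    #     # -> (False, False, True, False, True, 'foo-1.0.tar.gz')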

    def split_filename(self, filename, project_name):
        """
        Attempt to split a filename into project name, version and Python version.
        """
        return split_filename(filename, project_name)

    def convert_url_to_download_info(self, url, project_name):
        """
        See if a URL is a candidate for a download URL for a project (the URL
        has typically been scraped from an HTML page).
        If it is, a dictionary is returned with keys "name", "version",
        "filename" and "url"; otherwise, None is returned.
        """
        def same_project(name1, name2):
            return normalize_name(name1) == normalize_name(name2)

        result = None
        scheme, netloc, path, params, query, frag = urlparse(url)
        if frag.lower().startswith('egg='):  # pragma: no cover
            logger.debug('%s: version hint in fragment: %r',
                         project_name, frag)
        m = HASHER_HASH.match(frag)
        if m:
            algo, digest = m.groups()
        else:
            algo, digest = None, None
        origpath = path
        if path and path[-1] == '/':  # pragma: no cover
            path = path[:-1]
        if path.endswith('.whl'):
            try:
                wheel = Wheel(path)
                if not is_compatible(wheel, self.wheel_tags):
                    logger.debug('Wheel not compatible: %s', path)
                else:
                    if project_name is None:
                        include = True
                    else:
                        include = same_project(wheel.name, project_name)
                    if include:
                        result = {
                            'name': wheel.name,
                            'version': wheel.version,
                            'filename': wheel.filename,
                            'url': urlunparse((scheme, netloc, origpath,
                                               params, query, '')),
                            'python-version': ', '.join(
                                ['.'.join(list(v[2:])) for v in wheel.pyver]),
                        }
            except Exception as e:  # pragma: no cover
                logger.warning('invalid path for wheel: %s', path)
        elif not path.endswith(self.downloadable_extensions):  # pragma: no cover
            logger.debug('Not downloadable: %s', path)
        else:  # downloadable extension
            path = filename = posixpath.basename(path)
            for ext in self.downloadable_extensions:
                if path.endswith(ext):
                    path = path[:-len(ext)]
                    t = self.split_filename(path, project_name)
                    if not t:  # pragma: no cover
                        logger.debug('No match for project/version: %s', path)
                    else:
                        name, version, pyver = t
                        if not project_name or same_project(project_name, name):
                            result = {
                                'name': name,
                                'version': version,
                                'filename': filename,
                                'url': urlunparse((scheme, netloc, origpath,
                                                   params, query, '')),
                                # 'packagetype': 'sdist',
                            }
                            if pyver:  # pragma: no cover
                                result['python-version'] = pyver
                    break
        if result and algo:
            result['%s_digest' % algo] = digest
        return result

    def _get_digest(self, info):
        """
        Get a digest from a dictionary by looking at a "digests" dictionary
        or keys of the form 'algo_digest'.
        Returns a 2-tuple (algo, digest) if found, else None. Currently
        looks only for SHA256, then MD5.
        """
        result = None
        if 'digests' in info:
            digests = info['digests']
            for algo in ('sha256', 'md5'):
                if algo in digests:
                    result = (algo, digests[algo])
                    break
        if not result:
            for algo in ('sha256', 'md5'):
                key = '%s_digest' % algo
                if key in info:
                    result = (algo, info[key])
                    break
        return result
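
    # Illustration (not executed): both input shapes yield the same kind of
    # result, with SHA256 preferred over MD5 (digest values are hypothetical):
    #
    #     self._get_digest({'digests': {'sha256': 'abc...', 'md5': 'def...'}})
    #     # -> ('sha256', 'abc...')
    #     self._get_digest({'md5_digest': 'def...'})
    #     # -> ('md5', 'def...')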

    def _update_version_data(self, result, info):
        """
        Update a result dictionary (the final result from _get_project) with a
        dictionary for a specific version, which typically holds information
        gleaned from a filename or URL for an archive for the distribution.
        """
        name = info.pop('name')
        version = info.pop('version')
        if version in result:
            dist = result[version]
            md = dist.metadata
        else:
            dist = make_dist(name, version, scheme=self.scheme)
            md = dist.metadata
        dist.digest = digest = self._get_digest(info)
        url = info['url']
        result['digests'][url] = digest
        if md.source_url != info['url']:
            md.source_url = self.prefer_url(md.source_url, url)
        result['urls'].setdefault(version, set()).add(url)
        dist.locator = self
        result[version] = dist

    def locate(self, requirement, prereleases=False):
        """
        Find the most recent distribution which matches the given
        requirement.
        :param requirement: A requirement of the form 'foo (1.0)' or perhaps
                            'foo (>= 1.0, < 2.0, != 1.3)'
        :param prereleases: If ``True``, allow pre-release versions
                            to be located. Otherwise, pre-release versions
                            are not returned.
        :return: A :class:`Distribution` instance, or ``None`` if no such
                 distribution could be located.
        """
        result = None
        r = parse_requirement(requirement)
        if r is None:  # pragma: no cover
            raise DistlibException('Not a valid requirement: %r' % requirement)
        scheme = get_scheme(self.scheme)
        self.matcher = matcher = scheme.matcher(r.requirement)
        logger.debug('matcher: %s (%s)', matcher, type(matcher).__name__)
        versions = self.get_project(r.name)
        if len(versions) > 2:  # urls and digests keys are present
            # sometimes, versions are invalid
            slist = []
            vcls = matcher.version_class
            for k in versions:
                if k in ('urls', 'digests'):
                    continue
                try:
                    if not matcher.match(k):
                        logger.debug('%s did not match %r', matcher, k)
                    else:
                        if prereleases or not vcls(k).is_prerelease:
                            slist.append(k)
                        else:
                            logger.debug('skipping pre-release '
                                         'version %s of %s', k, matcher.name)
                except Exception:  # pragma: no cover
                    logger.warning('error matching %s with %r', matcher, k)
                    pass  # slist.append(k)
            if len(slist) > 1:
                slist = sorted(slist, key=scheme.key)
            if slist:
                logger.debug('sorted list: %s', slist)
                version = slist[-1]
                result = versions[version]
        if result:
            if r.extras:
                result.extras = r.extras
            result.download_urls = versions.get('urls', {}).get(version, set())
            d = {}
            sd = versions.get('digests', {})
            for url in result.download_urls:
                if url in sd:  # pragma: no cover
                    d[url] = sd[url]
            result.digests = d
        self.matcher = None
        return result
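
# Example usage (illustrative, not executed; any concrete subclass works the
# same way - the directory path is hypothetical):
#
#     loc = DirectoryLocator('/srv/archives')
#     dist = loc.locate('foo (>= 1.0, < 2.0)')
#     if dist is not None:
#         print(dist.name_and_version, dist.source_url)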


class PyPIRPCLocator(Locator):
    """
    This locator uses XML-RPC to locate distributions. It therefore
    cannot be used with simple mirrors (that only mirror file content).
    """
    def __init__(self, url, **kwargs):
        """
        Initialise an instance.
        :param url: The URL to use for XML-RPC.
        :param kwargs: Passed to the superclass constructor.
        """
        super(PyPIRPCLocator, self).__init__(**kwargs)
        self.base_url = url
        self.client = ServerProxy(url, timeout=3.0)

    def get_distribution_names(self):
        """
        Return all the distribution names known to this locator.
        """
        return set(self.client.list_packages())

    def _get_project(self, name):
        result = {'urls': {}, 'digests': {}}
        versions = self.client.package_releases(name, True)
        for v in versions:
            urls = self.client.release_urls(name, v)
            data = self.client.release_data(name, v)
            metadata = Metadata(scheme=self.scheme)
            metadata.name = data['name']
            metadata.version = data['version']
            metadata.license = data.get('license')
            metadata.keywords = data.get('keywords', [])
            metadata.summary = data.get('summary')
            dist = Distribution(metadata)
            if urls:
                info = urls[0]
                metadata.source_url = info['url']
                dist.digest = self._get_digest(info)
            dist.locator = self
            result[v] = dist
            for info in urls:
                url = info['url']
                digest = self._get_digest(info)
                result['urls'].setdefault(v, set()).add(url)
                result['digests'][url] = digest
        return result


class PyPIJSONLocator(Locator):
    """
    This locator uses PyPI's JSON interface. It's very limited in functionality
    and probably not worth using.
    """
    def __init__(self, url, **kwargs):
        super(PyPIJSONLocator, self).__init__(**kwargs)
        self.base_url = ensure_slash(url)

    def get_distribution_names(self):
        """
        Return all the distribution names known to this locator.
        """
        raise NotImplementedError('Not available from this locator')

    def _get_project(self, name):
        result = {'urls': {}, 'digests': {}}
        url = urljoin(self.base_url, '%s/json' % quote(name))
        try:
            resp = self.opener.open(url)
            data = resp.read().decode()  # for now
            d = json.loads(data)
            md = Metadata(scheme=self.scheme)
            data = d['info']
            md.name = data['name']
            md.version = data['version']
            md.license = data.get('license')
            md.keywords = data.get('keywords', [])
            md.summary = data.get('summary')
            dist = Distribution(md)
            dist.locator = self
            urls = d['urls']
            result[md.version] = dist
            for info in d['urls']:
                url = info['url']
                dist.download_urls.add(url)
                dist.digests[url] = self._get_digest(info)
                result['urls'].setdefault(md.version, set()).add(url)
                result['digests'][url] = self._get_digest(info)
            # Now get other releases
            for version, infos in d['releases'].items():
                if version == md.version:
                    continue  # already done
                omd = Metadata(scheme=self.scheme)
                omd.name = md.name
                omd.version = version
                odist = Distribution(omd)
                odist.locator = self
                result[version] = odist
                for info in infos:
                    url = info['url']
                    odist.download_urls.add(url)
                    odist.digests[url] = self._get_digest(info)
                    result['urls'].setdefault(version, set()).add(url)
                    result['digests'][url] = self._get_digest(info)
            # for info in urls:
            #     md.source_url = info['url']
            #     dist.digest = self._get_digest(info)
            #     dist.locator = self
            # for info in urls:
            #     url = info['url']
            #     result['urls'].setdefault(md.version, set()).add(url)
            #     result['digests'][url] = self._get_digest(info)
        except Exception as e:
            self.errors.put(text_type(e))
            logger.exception('JSON fetch failed: %s', e)
        return result


class Page(object):
    """
    This class represents a scraped HTML page.
    """
    # The following slightly hairy-looking regex just looks for the contents of
    # an anchor link, which has an attribute "href" either immediately preceded
    # or immediately followed by a "rel" attribute. The attribute values can be
    # declared with double quotes, single quotes or no quotes - which leads to
    # the length of the expression.
    _href = re.compile("""
(rel\\s*=\\s*(?:"(?P<rel1>[^"]*)"|'(?P<rel2>[^']*)'|(?P<rel3>[^>\\s\n]*))\\s+)?
href\\s*=\\s*(?:"(?P<url1>[^"]*)"|'(?P<url2>[^']*)'|(?P<url3>[^>\\s\n]*))
(\\s+rel\\s*=\\s*(?:"(?P<rel4>[^"]*)"|'(?P<rel5>[^']*)'|(?P<rel6>[^>\\s\n]*)))?
""", re.I | re.S | re.X)
    _base = re.compile(r"""<base\s+href\s*=\s*['"]?([^'">]+)""", re.I | re.S)

    def __init__(self, data, url):
        """
        Initialise an instance with the Unicode page contents and the URL they
        came from.
        """
        self.data = data
        self.base_url = self.url = url
        m = self._base.search(self.data)
        if m:
            self.base_url = m.group(1)

    _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)

    @cached_property
    def links(self):
        """
        Return the URLs of all the links on a page together with information
        about their "rel" attribute, for determining which ones to treat as
        downloads and which ones to queue for further scraping.
        """
        def clean(url):
            "Tidy up an URL."
            scheme, netloc, path, params, query, frag = urlparse(url)
            return urlunparse((scheme, netloc, quote(path),
                               params, query, frag))

        result = set()
        for match in self._href.finditer(self.data):
            d = match.groupdict('')
            rel = (d['rel1'] or d['rel2'] or d['rel3'] or
                   d['rel4'] or d['rel5'] or d['rel6'])
            url = d['url1'] or d['url2'] or d['url3']
            url = urljoin(self.base_url, url)
            url = unescape(url)
            url = self._clean_re.sub(lambda m: '%%%2x' % ord(m.group(0)), url)
            result.add((url, rel))
        # We sort the result, hoping to bring the most recent versions
        # to the front
        result = sorted(result, key=lambda t: t[0], reverse=True)
        return result
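
# Illustration (not executed): Page.links pairs each resolved, cleaned URL
# with its "rel" attribute (the page content and URL are hypothetical):
#
#     page = Page('<a rel="download" href="/pkg-1.0.tar.gz">pkg</a>',
#                 'https://example.com/simple/pkg/')
#     page.links
#     # -> [('https://example.com/pkg-1.0.tar.gz', 'download')]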


class SimpleScrapingLocator(Locator):
    """
    A locator which scrapes HTML pages to locate downloads for a distribution.
    This runs multiple threads to do the I/O; performance is at least as good
    as pip's PackageFinder, which works in an analogous fashion.
    """

    # These are used to deal with various Content-Encoding schemes.
    decoders = {
        'deflate': zlib.decompress,
        'gzip': lambda b: gzip.GzipFile(fileobj=BytesIO(b)).read(),
        'none': lambda b: b,
    }

    def __init__(self, url, timeout=None, num_workers=10, **kwargs):
        """
        Initialise an instance.
        :param url: The root URL to use for scraping.
        :param timeout: The timeout, in seconds, to be applied to requests.
                        This defaults to ``None`` (no timeout specified).
        :param num_workers: The number of worker threads you want to do I/O.
                            This defaults to 10.
        :param kwargs: Passed to the superclass.
        """
        super(SimpleScrapingLocator, self).__init__(**kwargs)
        self.base_url = ensure_slash(url)
        self.timeout = timeout
        self._page_cache = {}
        self._seen = set()
        self._to_fetch = queue.Queue()
        self._bad_hosts = set()
        self.skip_externals = False
        self.num_workers = num_workers
        self._lock = threading.RLock()
        # See issue #45: we need to be resilient when the locator is used
        # in a thread, e.g. with concurrent.futures. We can't use self._lock
        # as it is for coordinating our internal threads - the ones created
        # in _prepare_threads.
        self._gplock = threading.RLock()
        self.platform_check = False  # See issue #112

    def _prepare_threads(self):
        """
        Threads are created only when get_project is called, and terminate
        before it returns. They are there primarily to parallelise I/O (i.e.
        fetching web pages).
        """
        self._threads = []
        for i in range(self.num_workers):
            t = threading.Thread(target=self._fetch)
            t.setDaemon(True)
            t.start()
            self._threads.append(t)

    def _wait_threads(self):
        """
        Tell all the threads to terminate (by sending a sentinel value) and
        wait for them to do so.
        """
        # Note that you need two loops, since you can't say which
        # thread will get each sentinel
        for t in self._threads:
            self._to_fetch.put(None)  # sentinel
        for t in self._threads:
            t.join()
        self._threads = []

    def _get_project(self, name):
        result = {'urls': {}, 'digests': {}}
        with self._gplock:
            self.result = result
            self.project_name = name
            url = urljoin(self.base_url, '%s/' % quote(name))
            self._seen.clear()
            self._page_cache.clear()
            self._prepare_threads()
            try:
                logger.debug('Queueing %s', url)
                self._to_fetch.put(url)
                self._to_fetch.join()
            finally:
                self._wait_threads()
            del self.result
        return result

    platform_dependent = re.compile(r'\b(linux_(i\d86|x86_64|arm\w+)|'
                                    r'win(32|_amd64)|macosx_?\d+)\b', re.I)

    def _is_platform_dependent(self, url):
        """
        Does an URL refer to a platform-specific download?
        """
        return self.platform_dependent.search(url)

    def _process_download(self, url):
        """
        See if an URL is a suitable download for a project.
        If it is, register information in the result dictionary (for
        _get_project) about the specific version it's for.
        Note that the return value isn't actually used other than as a boolean
        value.
        """
        if self.platform_check and self._is_platform_dependent(url):
            info = None
        else:
            info = self.convert_url_to_download_info(url, self.project_name)
        logger.debug('process_download: %s -> %s', url, info)
        if info:
            with self._lock:  # needed because self.result is shared
                self._update_version_data(self.result, info)
        return info

    def _should_queue(self, link, referrer, rel):
        """
        Determine whether a link URL from a referring page and with a
        particular "rel" attribute should be queued for scraping.
        """
        scheme, netloc, path, _, _, _ = urlparse(link)
        if path.endswith(self.source_extensions + self.binary_extensions +
                         self.excluded_extensions):
            result = False
        elif self.skip_externals and not link.startswith(self.base_url):
            result = False
        elif not referrer.startswith(self.base_url):
            result = False
        elif rel not in ('homepage', 'download'):
            result = False
        elif scheme not in ('http', 'https', 'ftp'):
            result = False
        elif self._is_platform_dependent(link):
            result = False
        else:
            host = netloc.split(':', 1)[0]
            if host.lower() == 'localhost':
                result = False
            else:
                result = True
        logger.debug('should_queue: %s (%s) from %s -> %s', link, rel,
                     referrer, result)
        return result

    def _fetch(self):
        """
        Get a URL to fetch from the work queue, get the HTML page, examine its
        links for download candidates and candidates for further scraping.
        This is a handy method to run in a thread.
        """
        while True:
            url = self._to_fetch.get()
            try:
                if url:
                    page = self.get_page(url)
                    if page is None:  # e.g. after an error
                        continue
                    for link, rel in page.links:
                        if link not in self._seen:
                            try:
                                self._seen.add(link)
                                if (not self._process_download(link) and
                                        self._should_queue(link, url, rel)):
                                    logger.debug('Queueing %s from %s', link, url)
                                    self._to_fetch.put(link)
                            except MetadataInvalidError:  # e.g. invalid versions
                                pass
            except Exception as e:  # pragma: no cover
                self.errors.put(text_type(e))
            finally:
                # always do this, to avoid hangs :-)
                self._to_fetch.task_done()
            if not url:
                # logger.debug('Sentinel seen, quitting.')
                break

    def get_page(self, url):
        """
        Get the HTML for an URL, possibly from an in-memory cache.
        XXX TODO Note: this cache is never actually cleared. It's assumed that
        the data won't get stale over the lifetime of a locator instance (not
        necessarily true for the default_locator).
        """
        # http://peak.telecommunity.com/DevCenter/EasyInstall#package-index-api
        scheme, netloc, path, _, _, _ = urlparse(url)
        if scheme == 'file' and os.path.isdir(url2pathname(path)):
            url = urljoin(ensure_slash(url), 'index.html')
        if url in self._page_cache:
            result = self._page_cache[url]
            logger.debug('Returning %s from cache: %s', url, result)
        else:
            host = netloc.split(':', 1)[0]
            result = None
            if host in self._bad_hosts:
                logger.debug('Skipping %s due to bad host %s', url, host)
            else:
                req = Request(url, headers={'Accept-encoding': 'identity'})
                try:
                    logger.debug('Fetching %s', url)
                    resp = self.opener.open(req, timeout=self.timeout)
                    logger.debug('Fetched %s', url)
                    headers = resp.info()
                    content_type = headers.get('Content-Type', '')
                    if HTML_CONTENT_TYPE.match(content_type):
                        final_url = resp.geturl()
                        data = resp.read()
                        encoding = headers.get('Content-Encoding')
                        if encoding:
                            decoder = self.decoders[encoding]  # fail if not found
                            data = decoder(data)
                        encoding = 'utf-8'
                        m = CHARSET.search(content_type)
                        if m:
                            encoding = m.group(1)
                        try:
                            data = data.decode(encoding)
                        except UnicodeError:  # pragma: no cover
                            data = data.decode('latin-1')  # fallback
                        result = Page(data, final_url)
                        self._page_cache[final_url] = result
                except HTTPError as e:
                    if e.code != 404:
                        logger.exception('Fetch failed: %s: %s', url, e)
                except URLError as e:  # pragma: no cover
                    logger.exception('Fetch failed: %s: %s', url, e)
                    with self._lock:
                        self._bad_hosts.add(host)
                except Exception as e:  # pragma: no cover
                    logger.exception('Fetch failed: %s: %s', url, e)
                finally:
                    self._page_cache[url] = result  # even if None (failure)
        return result

    _distname_re = re.compile('<a href=[^>]*>([^<]+)<')

    def get_distribution_names(self):
        """
        Return all the distribution names known to this locator.
        """
        result = set()
        page = self.get_page(self.base_url)
        if not page:
            raise DistlibException('Unable to get %s' % self.base_url)
        for match in self._distname_re.finditer(page.data):
            result.add(match.group(1))
        return result
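
# Example usage (illustrative, not executed; scrapes the live index):
#
#     loc = SimpleScrapingLocator('https://pypi.org/simple/', timeout=3.0)
#     dist = loc.locate('requests (>= 2.0)')
#     # dist.download_urls and dist.digests are filled from the scraped links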


class DirectoryLocator(Locator):
    """
    This class locates distributions in a directory tree.
    """
    def __init__(self, path, **kwargs):
        """
        Initialise an instance.
        :param path: The root of the directory tree to search.
        :param kwargs: Passed to the superclass constructor,
                       except for:
                       * recursive - if True (the default), subdirectories are
                         recursed into. If False, only the top-level directory
                         is searched.
        """
        self.recursive = kwargs.pop('recursive', True)
        super(DirectoryLocator, self).__init__(**kwargs)
        path = os.path.abspath(path)
        if not os.path.isdir(path):  # pragma: no cover
            raise DistlibException('Not a directory: %r' % path)
        self.base_dir = path

    def should_include(self, filename, parent):
        """
        Should a filename be considered as a candidate for a distribution
        archive? As well as the filename, the directory which contains it
        is provided, though not used by the current implementation.
        """
        return filename.endswith(self.downloadable_extensions)

    def _get_project(self, name):
        result = {'urls': {}, 'digests': {}}
        for root, dirs, files in os.walk(self.base_dir):
            for fn in files:
                if self.should_include(fn, root):
                    fn = os.path.join(root, fn)
                    url = urlunparse(('file', '',
                                      pathname2url(os.path.abspath(fn)),
                                      '', '', ''))
                    info = self.convert_url_to_download_info(url, name)
                    if info:
                        self._update_version_data(result, info)
            if not self.recursive:
                break
        return result

    def get_distribution_names(self):
        """
        Return all the distribution names known to this locator.
        """
        result = set()
        for root, dirs, files in os.walk(self.base_dir):
            for fn in files:
                if self.should_include(fn, root):
                    fn = os.path.join(root, fn)
                    url = urlunparse(('file', '',
                                      pathname2url(os.path.abspath(fn)),
                                      '', '', ''))
                    info = self.convert_url_to_download_info(url, None)
                    if info:
                        result.add(info['name'])
            if not self.recursive:
                break
        return result
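
# Example usage (illustrative, not executed; the directory and its archives
# are hypothetical):
#
#     loc = DirectoryLocator('/srv/archives', recursive=False)
#     versions = loc.get_project('foo')
#     # -> {'1.0': <Distribution foo 1.0>, ..., 'urls': {...}, 'digests': {...}}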


class JSONLocator(Locator):
    """
    This locator uses special extended metadata (not available on PyPI) and is
    the basis of performant dependency resolution in distlib. Other locators
    require archive downloads before dependencies can be determined! As you
    might imagine, that can be slow.
    """
    def get_distribution_names(self):
        """
        Return all the distribution names known to this locator.
        """
        raise NotImplementedError('Not available from this locator')

    def _get_project(self, name):
        result = {'urls': {}, 'digests': {}}
        data = get_project_data(name)
        if data:
            for info in data.get('files', []):
                if info['ptype'] != 'sdist' or info['pyversion'] != 'source':
                    continue
                # We don't store summary in project metadata as it makes
                # the data bigger for no benefit during dependency
                # resolution
                dist = make_dist(data['name'], info['version'],
                                 summary=data.get('summary',
                                                  'Placeholder for summary'),
                                 scheme=self.scheme)
                md = dist.metadata
                md.source_url = info['url']
                # TODO SHA256 digest
                if 'digest' in info and info['digest']:
                    dist.digest = ('md5', info['digest'])
                md.dependencies = info.get('requirements', {})
                dist.exports = info.get('exports', {})
                result[dist.version] = dist
                result['urls'].setdefault(dist.version, set()).add(info['url'])
        return result


class DistPathLocator(Locator):
    """
    This locator finds installed distributions in a path. It can be useful for
    adding to an :class:`AggregatingLocator`.
    """
    def __init__(self, distpath, **kwargs):
        """
        Initialise an instance.
        :param distpath: A :class:`DistributionPath` instance to search.
        """
        super(DistPathLocator, self).__init__(**kwargs)
        assert isinstance(distpath, DistributionPath)
        self.distpath = distpath

    def _get_project(self, name):
        dist = self.distpath.get_distribution(name)
        if dist is None:
            result = {'urls': {}, 'digests': {}}
        else:
            result = {
                dist.version: dist,
                'urls': {dist.version: set([dist.source_url])},
                'digests': {dist.version: set([None])}
            }
        return result
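
# Example usage (illustrative, not executed): wrap the distributions already
# installed on sys.path so they can take part in aggregation:
#
#     from distlib.database import DistributionPath
#     loc = DistPathLocator(DistributionPath())
#     loc.get_project('pip')   # non-empty if pip is installed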


class AggregatingLocator(Locator):
    """
    This class allows you to chain and/or merge a list of locators.
    """
    def __init__(self, *locators, **kwargs):
        """
        Initialise an instance.
        :param locators: The list of locators to search.
        :param kwargs: Passed to the superclass constructor,
                       except for:
                       * merge - if False (the default), the first successful
                         search from any of the locators is returned. If True,
                         the results from all locators are merged (this can be
                         slow).
        """
        self.merge = kwargs.pop('merge', False)
        self.locators = locators
        super(AggregatingLocator, self).__init__(**kwargs)

    def clear_cache(self):
        super(AggregatingLocator, self).clear_cache()
        for locator in self.locators:
            locator.clear_cache()

    def _set_scheme(self, value):
        self._scheme = value
        for locator in self.locators:
            locator.scheme = value

    scheme = property(Locator.scheme.fget, _set_scheme)

    def _get_project(self, name):
        result = {}
        for locator in self.locators:
            d = locator.get_project(name)
            if d:
                if self.merge:
                    files = result.get('urls', {})
                    digests = result.get('digests', {})
                    # next line could overwrite result['urls'], result['digests']
                    result.update(d)
                    df = result.get('urls')
                    if files and df:
                        for k, v in files.items():
                            if k in df:
                                df[k] |= v
                            else:
                                df[k] = v
                    dd = result.get('digests')
                    if digests and dd:
                        dd.update(digests)
                else:
                    # See issue #18. If any dists are found and we're looking
                    # for specific constraints, we only return something if
                    # a match is found. For example, if a DirectoryLocator
                    # returns just foo (1.0) while we're looking for
                    # foo (>= 2.0), we'll pretend there was nothing there so
                    # that subsequent locators can be queried. Otherwise we
                    # would just return foo (1.0) which would then lead to a
                    # failure to find foo (>= 2.0), because other locators
                    # weren't searched. Note that this only matters when
                    # merge=False.
                    if self.matcher is None:
                        found = True
                    else:
                        found = False
                        for k in d:
                            if self.matcher.match(k):
                                found = True
                                break
                    if found:
                        result = d
                        break
        return result

    def get_distribution_names(self):
        """
        Return all the distribution names known to this locator.
        """
        result = set()
        for locator in self.locators:
            try:
                result |= locator.get_distribution_names()
            except NotImplementedError:
                pass
        return result
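
# Example usage (illustrative, not executed; the local directory is
# hypothetical) - try a local archive directory before falling back to PyPI:
#
#     loc = AggregatingLocator(
#         DirectoryLocator('/srv/archives'),
#         SimpleScrapingLocator('https://pypi.org/simple/', timeout=3.0),
#         scheme='legacy')
#     dist = loc.locate('foo (>= 1.0)')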


# We use a legacy scheme simply because most of the dists on PyPI use legacy
# versions which don't conform to PEP 426 / PEP 440.
default_locator = AggregatingLocator(
                    JSONLocator(),
                    SimpleScrapingLocator('https://pypi.org/simple/',
                                          timeout=3.0),
                    scheme='legacy')

locate = default_locator.locate
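
# Example usage of the module-level convenience (illustrative, not executed;
# queries PyPI via default_locator):
#
#     from distlib.locators import locate
#     dist = locate('requests (>= 2.0)')
#     # dist is a Distribution with .name, .version, .source_url, .digests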

NAME_VERSION_RE = re.compile(r'(?P<name>[\w-]+)\s*'
                             r'\(\s*(==\s*)?(?P<ver>[^)]+)\)$')


class DependencyFinder(object):
    """
    Locate dependencies for distributions.
    """
    def __init__(self, locator=None):
        """
        Initialise an instance, using the specified locator
        to locate distributions.
        """
        self.locator = locator or default_locator
        self.scheme = get_scheme(self.locator.scheme)

    def add_distribution(self, dist):
        """
        Add a distribution to the finder. This will update internal information
        about who provides what.
        :param dist: The distribution to add.
        """
        logger.debug('adding distribution %s', dist)
        name = dist.key
        self.dists_by_name[name] = dist
        self.dists[(name, dist.version)] = dist
        for p in dist.provides:
            name, version = parse_name_and_version(p)
            logger.debug('Add to provided: %s, %s, %s', name, version, dist)
            self.provided.setdefault(name, set()).add((version, dist))

    def remove_distribution(self, dist):
        """
        Remove a distribution from the finder. This will update internal
        information about who provides what.
        :param dist: The distribution to remove.
        """
        logger.debug('removing distribution %s', dist)
        name = dist.key
        del self.dists_by_name[name]
        del self.dists[(name, dist.version)]
        for p in dist.provides:
            name, version = parse_name_and_version(p)
            logger.debug('Remove from provided: %s, %s, %s', name, version, dist)
            s = self.provided[name]
            s.remove((version, dist))
            if not s:
                del self.provided[name]

    def get_matcher(self, reqt):
        """
        Get a version matcher for a requirement.
        :param reqt: The requirement
        :type reqt: str
        :return: A version matcher (an instance of
                 :class:`distlib.version.Matcher`).
        """
        try:
            matcher = self.scheme.matcher(reqt)
        except UnsupportedVersionError:  # pragma: no cover
            # XXX compat-mode if cannot read the version
            name = reqt.split()[0]
            matcher = self.scheme.matcher(name)
        return matcher

    def find_providers(self, reqt):
        """
        Find the distributions which can fulfill a requirement.
        :param reqt: The requirement.
        :type reqt: str
        :return: A set of distributions which can fulfill the requirement.
        """
        matcher = self.get_matcher(reqt)
        name = matcher.key  # case-insensitive
        result = set()
        provided = self.provided
        if name in provided:
            for version, provider in provided[name]:
                try:
                    match = matcher.match(version)
                except UnsupportedVersionError:
                    match = False
                if match:
                    result.add(provider)
                    break
        return result

    def try_to_replace(self, provider, other, problems):
        """
        Attempt to replace one provider with another. This is typically used
        when resolving dependencies from multiple sources, e.g. A requires
        (B >= 1.0) while C requires (B >= 1.1).
        For successful replacement, ``provider`` must meet all the requirements
        which ``other`` fulfills.
        :param provider: The provider we are trying to replace with.
        :param other: The provider we're trying to replace.
        :param problems: If False is returned, this will contain what
                         problems prevented replacement. This is currently
                         a tuple of the literal string 'cantreplace',
                         ``provider``, ``other`` and the set of requirements
                         that ``provider`` couldn't fulfill.
        :return: True if we can replace ``other`` with ``provider``, else
                 False.
        """
        rlist = self.reqts[other]
        unmatched = set()
        for s in rlist:
            matcher = self.get_matcher(s)
            if not matcher.match(provider.version):
                unmatched.add(s)
        if unmatched:
            # can't replace other with provider
            problems.add(('cantreplace', provider, other,
                          frozenset(unmatched)))
            result = False
        else:
            # can replace other with provider
            self.remove_distribution(other)
            del self.reqts[other]
            for s in rlist:
                self.reqts.setdefault(provider, set()).add(s)
            self.add_distribution(provider)
            result = True
        return result

    def find(self, requirement, meta_extras=None, prereleases=False):
        """
        Find a distribution and all distributions it depends on.

        :param requirement: The requirement specifying the distribution to
                            find, or a Distribution instance.
        :param meta_extras: A list of meta extras such as :test:, :build: and
                            so on.
        :param prereleases: If ``True``, allow pre-release versions to be
                            returned - otherwise, don't return prereleases
                            unless they're all that's available.

        Return a set of :class:`Distribution` instances and a set of
        problems.

        The distributions returned should be such that they have the
        :attr:`required` attribute set to ``True`` if they were
        from the ``requirement`` passed to ``find()``, and they have the
        :attr:`build_time_dependency` attribute set to ``True`` unless they
        are post-installation dependencies of the ``requirement``.

        The problems should be a tuple consisting of the string
        ``'unsatisfied'`` and the requirement which couldn't be satisfied
        by any distribution known to the locator.
        """
        self.provided = {}
        self.dists = {}
        self.dists_by_name = {}
        self.reqts = {}

        meta_extras = set(meta_extras or [])
        if ':*:' in meta_extras:
            meta_extras.remove(':*:')
            # :meta: and :run: are implicitly included
            meta_extras |= set([':test:', ':build:', ':dev:'])

        if isinstance(requirement, Distribution):
            dist = odist = requirement
            logger.debug('passed %s as requirement', odist)
        else:
            dist = odist = self.locator.locate(requirement,
                                               prereleases=prereleases)
            if dist is None:
                raise DistlibException('Unable to locate %r' % requirement)
            logger.debug('located %s', odist)
        dist.requested = True
        problems = set()
        todo = set([dist])
        install_dists = set([odist])
        while todo:
            dist = todo.pop()
            name = dist.key  # case-insensitive
            if name not in self.dists_by_name:
                self.add_distribution(dist)
            else:
                # import pdb; pdb.set_trace()
                other = self.dists_by_name[name]
                if other != dist:
                    self.try_to_replace(dist, other, problems)

            ireqts = dist.run_requires | dist.meta_requires
            sreqts = dist.build_requires
            ereqts = set()
            if meta_extras and dist in install_dists:
                for key in ('test', 'build', 'dev'):
                    e = ':%s:' % key
                    if e in meta_extras:
                        ereqts |= getattr(dist, '%s_requires' % key)
            all_reqts = ireqts | sreqts | ereqts
            for r in all_reqts:
                providers = self.find_providers(r)
                if not providers:
                    logger.debug('No providers found for %r', r)
                    provider = self.locator.locate(r, prereleases=prereleases)
                    # If no provider is found and we didn't consider
                    # prereleases, consider them now.
                    if provider is None and not prereleases:
                        provider = self.locator.locate(r, prereleases=True)
                    if provider is None:
                        logger.debug('Cannot satisfy %r', r)
                        problems.add(('unsatisfied', r))
                    else:
                        n, v = provider.key, provider.version
                        if (n, v) not in self.dists:
                            todo.add(provider)
                        providers.add(provider)
                        if r in ireqts and dist in install_dists:
                            install_dists.add(provider)
                            logger.debug('Adding %s to install_dists',
                                         provider.name_and_version)
                for p in providers:
                    name = p.key
                    if name not in self.dists_by_name:
                        self.reqts.setdefault(p, set()).add(r)
                    else:
                        other = self.dists_by_name[name]
                        if other != p:
                            # see if other can be replaced by p
                            self.try_to_replace(p, other, problems)

        dists = set(self.dists.values())
        for dist in dists:
            dist.build_time_dependency = dist not in install_dists
            if dist.build_time_dependency:
                logger.debug('%s is a build-time dependency only.',
                             dist.name_and_version)
        logger.debug('find done for %s', odist)
        return dists, problems
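
# Example usage (illustrative, not executed; resolves against PyPI through
# default_locator):
#
#     finder = DependencyFinder()
#     dists, problems = finder.find('requests (>= 2.0)')
#     for d in dists:
#         print(d.name_and_version, d.build_time_dependency)
#     # 'problems' holds tuples such as ('unsatisfied', <requirement>)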